home
***
CD-ROM
|
disk
|
FTP
|
other
***
search
/
Chip 2007 January, February, March & April
/
Chip-Cover-CD-2007-02.iso
/
Pakiet bezpieczenstwa
/
mini Pentoo LiveCD 2006.1
/
mpentoo-2006.1.iso
/
livecd.squashfs
/
usr
/
lib
/
python2.4
/
urllib.pyo
(
.txt
)
< prev
next >
Wrap
Python Compiled Bytecode
|
2005-10-18
|
44KB
|
1,662 lines
# Source Generated with Decompyle++
# File: in.pyo (Python 2.4)
'''Open an arbitrary URL.
See the following document for more info on URLs:
"Names and Addresses, URIs, URLs, URNs, URCs", at
http://www.w3.org/pub/WWW/Addressing/Overview.html
See also the HTTP spec (from which the error codes are derived):
"HTTP - Hypertext Transfer Protocol", at
http://www.w3.org/pub/WWW/Protocols/
Related standards and specs:
- RFC1808: the "relative URL" spec. (authoritative status)
- RFC1738 - the "URL standard". (authoritative status)
- RFC1630 - the "URI spec". (informational status)
The object returned by URLopener().open(file) will differ per
protocol. All you know is that is has methods read(), readline(),
readlines(), fileno(), close() and info(). The read*(), fileno()
and close() methods work like those of open files.
The info() method returns a mimetools.Message object which can be
used to query various info about the object, if available.
(mimetools.Message objects are queried with the getheader() method.)
'''
import string
import socket
import os
import time
import sys
from urlparse import urljoin as basejoin
__all__ = [
'urlopen',
'URLopener',
'FancyURLopener',
'urlretrieve',
'urlcleanup',
'quote',
'quote_plus',
'unquote',
'unquote_plus',
'urlencode',
'url2pathname',
'pathname2url',
'splittag',
'localhost',
'thishost',
'ftperrors',
'basejoin',
'unwrap',
'splittype',
'splithost',
'splituser',
'splitpasswd',
'splitport',
'splitnport',
'splitquery',
'splitattr',
'splitvalue',
'splitgophertype',
'getproxies']
__version__ = '1.16'
MAXFTPCACHE = 10
# Select URL-path <-> filesystem-path converters for the current platform;
# Mac / Windows / RISC OS need special handling of their path syntax.
if os.name == 'mac':
    from macurl2path import url2pathname, pathname2url
elif os.name == 'nt':
    from nturl2path import url2pathname, pathname2url
elif os.name == 'riscos':
    from rourl2path import url2pathname, pathname2url
else:
    def url2pathname(pathname):
        """OS-specific conversion from a relative URL of the 'file' scheme
        to a file system path; not recommended for general use."""
        return unquote(pathname)

    def pathname2url(pathname):
        """OS-specific conversion from a file system path to a relative URL
        of the 'file' scheme; not recommended for general use."""
        return quote(pathname)
# Shared opener instance used by the module-level convenience functions.
_urlopener = None

def urlopen(url, data=None, proxies=None):
    """urlopen(url [, data]) -> open file-like object"""
    global _urlopener
    if proxies is not None:
        # An explicit proxy mapping gets a private opener; it is NOT cached.
        opener = FancyURLopener(proxies=proxies)
    elif not _urlopener:
        opener = FancyURLopener()
        _urlopener = opener
    else:
        opener = _urlopener
    if data is None:
        return opener.open(url)
    else:
        return opener.open(url, data)
def urlretrieve(url, filename=None, reporthook=None, data=None):
    """Retrieve *url* into a local file; return (filename, headers).

    Uses (and lazily creates) the shared module-level opener.
    """
    global _urlopener
    if not _urlopener:
        _urlopener = FancyURLopener()
    return _urlopener.retrieve(url, filename, reporthook, data)
def urlcleanup():
    """Discard temporary files and cached data held by the shared opener."""
    if _urlopener:
        _urlopener.cleanup()
class ContentTooShortError(IOError):
    """Raised by retrieve() when the amount of downloaded data is less
    than the expected amount (given by the Content-Length header)."""
    def __init__(self, message, content):
        IOError.__init__(self, message)
        # The (truncated) data received before the short read was detected.
        self.content = content

# FTP connection cache shared by all URLopener instances (see open_ftp).
ftpcache = {}
class URLopener:
"""Class to open URLs.
This is a class rather than just a subroutine because we may need
more than one set of global protocol-specific options.
Note -- this is a base class for those who don't want the
automatic handling of errors type 302 (relocated) and 401
(authorization needed)."""
__tempfiles = None
version = 'Python-urllib/%s' % __version__
def __init__(self, proxies = None, **x509):
if proxies is None:
proxies = getproxies()
self.proxies = proxies
self.key_file = x509.get('key_file')
self.cert_file = x509.get('cert_file')
self.addheaders = [
('User-agent', self.version)]
self._URLopener__tempfiles = []
self._URLopener__unlink = os.unlink
self.tempcache = None
self.ftpcache = ftpcache
def __del__(self):
self.close()
def close(self):
self.cleanup()
def cleanup(self):
if self._URLopener__tempfiles:
for file in self._URLopener__tempfiles:
try:
self._URLopener__unlink(file)
continue
except OSError:
continue
del self._URLopener__tempfiles[:]
if self.tempcache:
self.tempcache.clear()
def addheader(self, *args):
"""Add a header to be used by the HTTP interface only
e.g. u.addheader('Accept', 'sound/basic')"""
self.addheaders.append(args)
def open(self, fullurl, data = None):
"""Use URLopener().open(file) instead of open(file, 'r')."""
fullurl = unwrap(toBytes(fullurl))
if self.tempcache and fullurl in self.tempcache:
(filename, headers) = self.tempcache[fullurl]
fp = open(filename, 'rb')
return addinfourl(fp, headers, fullurl)
(urltype, url) = splittype(fullurl)
if not urltype:
urltype = 'file'
if urltype in self.proxies:
proxy = self.proxies[urltype]
(urltype, proxyhost) = splittype(proxy)
(host, selector) = splithost(proxyhost)
url = (host, fullurl)
else:
proxy = None
name = 'open_' + urltype
self.type = urltype
name = name.replace('-', '_')
if not hasattr(self, name):
if proxy:
return self.open_unknown_proxy(proxy, fullurl, data)
else:
return self.open_unknown(fullurl, data)
try:
if data is None:
return getattr(self, name)(url)
else:
return getattr(self, name)(url, data)
except socket.error:
msg = None
raise IOError, ('socket error', msg), sys.exc_info()[2]
def open_unknown(self, fullurl, data = None):
'''Overridable interface to open unknown URL type.'''
(type, url) = splittype(fullurl)
raise IOError, ('url error', 'unknown url type', type)
def open_unknown_proxy(self, proxy, fullurl, data = None):
'''Overridable interface to open unknown URL type.'''
(type, url) = splittype(fullurl)
raise IOError, ('url error', 'invalid proxy for %s' % type, proxy)
def retrieve(self, url, filename = None, reporthook = None, data = None):
'''retrieve(url) returns (filename, headers) for a local object
or (tempfilename, headers) for a remote object.'''
url = unwrap(toBytes(url))
if self.tempcache and url in self.tempcache:
return self.tempcache[url]
(type, url1) = splittype(url)
if filename is None:
if not type or type == 'file':
try:
fp = self.open_local_file(url1)
hdrs = fp.info()
del fp
return (url2pathname(splithost(url1)[1]), hdrs)
except IOError:
msg = None
except:
None<EXCEPTION MATCH>IOError
None<EXCEPTION MATCH>IOError
fp = self.open(url, data)
headers = fp.info()
if filename:
tfp = open(filename, 'wb')
else:
import tempfile as tempfile
(garbage, path) = splittype(url)
if not path:
pass
(garbage, path) = splithost('')
if not path:
pass
(path, garbage) = splitquery('')
if not path:
pass
(path, garbage) = splitattr('')
suffix = os.path.splitext(path)[1]
(fd, filename) = tempfile.mkstemp(suffix)
self._URLopener__tempfiles.append(filename)
tfp = os.fdopen(fd, 'wb')
result = (filename, headers)
if self.tempcache is not None:
self.tempcache[url] = result
bs = 1024 * 8
size = -1
read = 0
blocknum = 0
if reporthook:
if 'content-length' in headers:
size = int(headers['Content-Length'])
reporthook(blocknum, bs, size)
while None:
block = fp.read(bs)
if block == '':
break
read += len(block)
blocknum += 1
if reporthook:
reporthook(blocknum, bs, size)
continue
fp.close()
tfp.close()
del fp
del tfp
if size >= 0 and read < size:
raise ContentTooShortError('retrieval incomplete: got only %i out of %i bytes' % (read, size), result)
return result
def open_http(self, url, data = None):
'''Use HTTP protocol.'''
import httplib as httplib
user_passwd = None
if isinstance(url, str):
(host, selector) = splithost(url)
if host:
(user_passwd, host) = splituser(host)
host = unquote(host)
realhost = host
else:
(host, selector) = url
(urltype, rest) = splittype(selector)
url = rest
user_passwd = None
if urltype.lower() != 'http':
realhost = None
else:
(realhost, rest) = splithost(rest)
if realhost:
(user_passwd, realhost) = splituser(realhost)
if user_passwd:
selector = '%s://%s%s' % (urltype, realhost, rest)
if proxy_bypass(realhost):
host = realhost
if not host:
raise IOError, ('http error', 'no host given')
if user_passwd:
import base64 as base64
auth = base64.encodestring(user_passwd).strip()
else:
auth = None
h = httplib.HTTP(host)
if data is not None:
h.putrequest('POST', selector)
h.putheader('Content-type', 'application/x-www-form-urlencoded')
h.putheader('Content-length', '%d' % len(data))
else:
h.putrequest('GET', selector)
if auth:
h.putheader('Authorization', 'Basic %s' % auth)
if realhost:
h.putheader('Host', realhost)
for args in self.addheaders:
h.putheader(*args)
h.endheaders()
if data is not None:
h.send(data)
(errcode, errmsg, headers) = h.getreply()
fp = h.getfile()
if errcode == 200:
return addinfourl(fp, headers, 'http:' + url)
elif data is None:
return self.http_error(url, fp, errcode, errmsg, headers)
else:
return self.http_error(url, fp, errcode, errmsg, headers, data)
def http_error(self, url, fp, errcode, errmsg, headers, data = None):
'''Handle http errors.
Derived class can override this, or provide specific handlers
named http_error_DDD where DDD is the 3-digit error code.'''
name = 'http_error_%d' % errcode
if hasattr(self, name):
method = getattr(self, name)
if data is None:
result = method(url, fp, errcode, errmsg, headers)
else:
result = method(url, fp, errcode, errmsg, headers, data)
if result:
return result
return self.http_error_default(url, fp, errcode, errmsg, headers)
def http_error_default(self, url, fp, errcode, errmsg, headers):
'''Default error handler: close the connection and raise IOError.'''
void = fp.read()
fp.close()
raise IOError, ('http error', errcode, errmsg, headers)
if hasattr(socket, 'ssl'):
def open_https(self, url, data = None):
'''Use HTTPS protocol.'''
import httplib
user_passwd = None
if isinstance(url, str):
(host, selector) = splithost(url)
if host:
(user_passwd, host) = splituser(host)
host = unquote(host)
realhost = host
else:
(host, selector) = url
(urltype, rest) = splittype(selector)
url = rest
user_passwd = None
if urltype.lower() != 'https':
realhost = None
else:
(realhost, rest) = splithost(rest)
if realhost:
(user_passwd, realhost) = splituser(realhost)
if user_passwd:
selector = '%s://%s%s' % (urltype, realhost, rest)
if not host:
raise IOError, ('https error', 'no host given')
if user_passwd:
import base64
auth = base64.encodestring(user_passwd).strip()
else:
auth = None
h = httplib.HTTPS(host, 0, key_file = self.key_file, cert_file = self.cert_file)
if data is not None:
h.putrequest('POST', selector)
h.putheader('Content-type', 'application/x-www-form-urlencoded')
h.putheader('Content-length', '%d' % len(data))
else:
h.putrequest('GET', selector)
if auth:
h.putheader('Authorization', 'Basic %s' % auth)
if realhost:
h.putheader('Host', realhost)
for args in self.addheaders:
h.putheader(*args)
h.endheaders()
if data is not None:
h.send(data)
(errcode, errmsg, headers) = h.getreply()
fp = h.getfile()
if errcode == 200:
return addinfourl(fp, headers, 'https:' + url)
elif data is None:
return self.http_error(url, fp, errcode, errmsg, headers)
else:
return self.http_error(url, fp, errcode, errmsg, headers, data)
def open_gopher(self, url):
'''Use Gopher protocol.'''
import gopherlib as gopherlib
(host, selector) = splithost(url)
if not host:
raise IOError, ('gopher error', 'no host given')
host = unquote(host)
(type, selector) = splitgophertype(selector)
(selector, query) = splitquery(selector)
selector = unquote(selector)
if query:
query = unquote(query)
fp = gopherlib.send_query(selector, query, host)
else:
fp = gopherlib.send_selector(selector, host)
return addinfourl(fp, noheaders(), 'gopher:' + url)
def open_file(self, url):
'''Use local file or FTP depending on form of URL.'''
if url[:2] == '//' and url[2:3] != '/' and url[2:12].lower() != 'localhost/':
return self.open_ftp(url)
else:
return self.open_local_file(url)
def open_local_file(self, url):
'''Use local file.'''
import mimetypes as mimetypes
import mimetools as mimetools
import email.Utils as email
try:
StringIO = StringIO
import cStringIO
except ImportError:
StringIO = StringIO
import StringIO
(host, file) = splithost(url)
localname = url2pathname(file)
try:
stats = os.stat(localname)
except OSError:
e = None
raise IOError(e.errno, e.strerror, e.filename)
size = stats.st_size
modified = email.Utils.formatdate(stats.st_mtime, usegmt = True)
mtype = mimetypes.guess_type(url)[0]
if not mtype:
pass
headers = mimetools.Message(StringIO('Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' % ('text/plain', size, modified)))
if not host:
urlfile = file
if file[:1] == '/':
urlfile = 'file://' + file
return addinfourl(open(localname, 'rb'), headers, urlfile)
(host, port) = splitport(host)
if not port and socket.gethostbyname(host) in (localhost(), thishost()):
urlfile = file
if file[:1] == '/':
urlfile = 'file://' + file
return addinfourl(open(localname, 'rb'), headers, urlfile)
raise IOError, ('local file error', 'not on local host')
def open_ftp(self, url):
'''Use FTP protocol.'''
import mimetypes
import mimetools
try:
StringIO = StringIO
import cStringIO
except ImportError:
StringIO = StringIO
import StringIO
(host, path) = splithost(url)
if not host:
raise IOError, ('ftp error', 'no host given')
(host, port) = splitport(host)
(user, host) = splituser(host)
if user:
(user, passwd) = splitpasswd(user)
else:
passwd = None
host = unquote(host)
if not user:
pass
user = unquote('')
if not passwd:
pass
passwd = unquote('')
host = socket.gethostbyname(host)
if not port:
import ftplib as ftplib
port = ftplib.FTP_PORT
else:
port = int(port)
(path, attrs) = splitattr(path)
path = unquote(path)
dirs = path.split('/')
dirs = dirs[:-1]
file = dirs[-1]
if dirs and not dirs[0]:
dirs = dirs[1:]
if dirs and not dirs[0]:
dirs[0] = '/'
key = (user, host, port, '/'.join(dirs))
if len(self.ftpcache) > MAXFTPCACHE:
for k in self.ftpcache.keys():
if k != key:
v = self.ftpcache[k]
del self.ftpcache[k]
v.close()
continue
try:
if key not in self.ftpcache:
self.ftpcache[key] = ftpwrapper(user, passwd, host, port, dirs)
if not file:
type = 'D'
else:
type = 'I'
for attr in attrs:
(attr, value) = splitvalue(attr)
if attr.lower() == 'type' and value in ('a', 'A', 'i', 'I', 'd', 'D'):
type = value.upper()
continue
(fp, retrlen) = self.ftpcache[key].retrfile(file, type)
mtype = mimetypes.guess_type('ftp:' + url)[0]
headers = ''
if mtype:
headers += 'Content-Type: %s\n' % mtype
if retrlen is not None and retrlen >= 0:
headers += 'Content-Length: %d\n' % retrlen
headers = mimetools.Message(StringIO(headers))
return addinfourl(fp, headers, 'ftp:' + url)
except ftperrors():
msg = None
raise IOError, ('ftp error', msg), sys.exc_info()[2]
def open_data(self, url, data = None):
'''Use "data" URL.'''
import mimetools
try:
StringIO = StringIO
import cStringIO
except ImportError:
StringIO = StringIO
import StringIO
try:
(type, data) = url.split(',', 1)
except ValueError:
raise IOError, ('data error', 'bad data URL')
if not type:
type = 'text/plain;charset=US-ASCII'
semi = type.rfind(';')
if semi >= 0 and '=' not in type[semi:]:
encoding = type[semi + 1:]
type = type[:semi]
else:
encoding = ''
msg = []
msg.append('Date: %s' % time.strftime('%a, %d %b %Y %T GMT', time.gmtime(time.time())))
msg.append('Content-type: %s' % type)
if encoding == 'base64':
import base64
data = base64.decodestring(data)
else:
data = unquote(data)
msg.append('Content-length: %d' % len(data))
msg.append('')
msg.append(data)
msg = '\n'.join(msg)
f = StringIO(msg)
headers = mimetools.Message(f, 0)
f.fileno = None
return addinfourl(f, headers, url)
class FancyURLopener(URLopener):
    """Derived class with handlers for errors we can handle (perhaps)."""

    def __init__(self, *args, **kwargs):
        URLopener.__init__(self, *args, **kwargs)
        self.auth_cache = {}
        self.tries = 0
        self.maxtries = 10

    def http_error_default(self, url, fp, errcode, errmsg, headers):
        """Default error handling -- don't raise an exception."""
        return addinfourl(fp, headers, "http:" + url)

    def http_error_302(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 302 -- relocated (temporarily)."""
        self.tries += 1
        # Guard against redirect loops; without this, a cycle of 302s
        # would recurse forever (maxtries is configured in __init__).
        if self.maxtries and self.tries >= self.maxtries:
            if hasattr(self, "http_error_500"):
                meth = getattr(self, "http_error_500")
            else:
                meth = self.http_error_default
            self.tries = 0
            return meth(url, fp, 500,
                        "Internal Server Error: Redirect Recursion", headers)
        result = self.redirect_internal(url, fp, errcode, errmsg, headers,
                                        data)
        self.tries = 0
        return result

    def redirect_internal(self, url, fp, errcode, errmsg, headers, data):
        if 'location' in headers:
            newurl = headers['location']
        elif 'uri' in headers:
            newurl = headers['uri']
        else:
            return
        void = fp.read()
        fp.close()
        # In case the server sent a relative URL, join with original:
        newurl = basejoin(self.type + ":" + url, newurl)
        return self.open(newurl)

    def http_error_301(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 301 -- also relocated (permanently)."""
        return self.http_error_302(url, fp, errcode, errmsg, headers, data)

    def http_error_303(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 303 -- also relocated (essentially identical to 302)."""
        return self.http_error_302(url, fp, errcode, errmsg, headers, data)

    def http_error_307(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 307 -- relocated, but turn POST into error."""
        if data is None:
            return self.http_error_302(url, fp, errcode, errmsg, headers,
                                       data)
        else:
            return self.http_error_default(url, fp, errcode, errmsg, headers)

    def http_error_401(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 401 -- authentication required.
        See this URL for a description of the basic authentication scheme:
        http://www.ics.uci.edu/pub/ietf/http/draft-ietf-http-v10-spec-00.txt"""
        # http_error_default raises, so the fall-through calls below never
        # return to this frame on the error path.
        if not 'www-authenticate' in headers:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        stuff = headers['www-authenticate']
        import re
        match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
        if not match:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        scheme, realm = match.groups()
        if scheme.lower() != 'basic':
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        name = 'retry_' + self.type + '_basic_auth'
        if data is None:
            return getattr(self, name)(url, realm)
        else:
            return getattr(self, name)(url, realm, data)

    def retry_http_basic_auth(self, url, realm, data=None):
        host, selector = splithost(url)
        i = host.find('@') + 1
        host = host[i:]
        user, passwd = self.get_user_passwd(host, realm, i)
        # Give up only when neither a user nor a password was supplied.
        # (The decompiled source dropped the parentheses, which made this
        # bail out whenever a password WAS supplied.)
        if not (user or passwd):
            return None
        host = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + host
        newurl = 'http://' + host + selector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def retry_https_basic_auth(self, url, realm, data=None):
        host, selector = splithost(url)
        i = host.find('@') + 1
        host = host[i:]
        user, passwd = self.get_user_passwd(host, realm, i)
        if not (user or passwd):
            return None
        host = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + host
        newurl = '//' + host + selector
        return self.open_https(newurl, data)

    def get_user_passwd(self, host, realm, clear_cache=0):
        key = realm + '@' + host.lower()
        if key in self.auth_cache:
            if clear_cache:
                del self.auth_cache[key]
            else:
                return self.auth_cache[key]
        user, passwd = self.prompt_user_passwd(host, realm)
        if user or passwd:
            self.auth_cache[key] = (user, passwd)
        return user, passwd

    def prompt_user_passwd(self, host, realm):
        """Override this in a GUI environment!"""
        import getpass
        try:
            user = raw_input("Enter username for %s at %s: " % (realm, host))
            passwd = getpass.getpass("Enter password for %s in %s at %s: " %
                                     (user, realm, host))
            return user, passwd
        except KeyboardInterrupt:
            print
            return None, None
# Cached IP address for 'localhost'; resolved once on first use.
_localhost = None

def localhost():
    """Return the IP address of the magic hostname 'localhost'."""
    global _localhost
    if _localhost is None:
        _localhost = socket.gethostbyname('localhost')
    return _localhost
# Cached IP address of this machine; resolved once on first use.
_thishost = None

def thishost():
    """Return the IP address of the current host."""
    global _thishost
    if _thishost is None:
        _thishost = socket.gethostbyname(socket.gethostname())
    return _thishost
# Cached ftplib.all_errors tuple; the import is deferred until needed.
_ftperrors = None

def ftperrors():
    """Return the set of errors raised by the FTP class."""
    global _ftperrors
    if _ftperrors is None:
        import ftplib
        _ftperrors = ftplib.all_errors
    return _ftperrors
# Cached empty message object returned by noheaders().
_noheaders = None

def noheaders():
    """Return an empty mimetools.Message object."""
    global _noheaders
    if _noheaders is None:
        import mimetools
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        _noheaders = mimetools.Message(StringIO(), 0)
        _noheaders.fp.close()
    return _noheaders
class ftpwrapper:
'''Class used by open_ftp() for cache of open FTP connections.'''
def __init__(self, user, passwd, host, port, dirs):
self.user = user
self.passwd = passwd
self.host = host
self.port = port
self.dirs = dirs
self.init()
def init(self):
import ftplib
self.busy = 0
self.ftp = ftplib.FTP()
self.ftp.connect(self.host, self.port)
self.ftp.login(self.user, self.passwd)
for dir in self.dirs:
self.ftp.cwd(dir)
def retrfile(self, file, type):
import ftplib
self.endtransfer()
if type in ('d', 'D'):
cmd = 'TYPE A'
isdir = 1
else:
cmd = 'TYPE ' + type
isdir = 0
try:
self.ftp.voidcmd(cmd)
except ftplib.all_errors:
self.init()
self.ftp.voidcmd(cmd)
conn = None
if file and not isdir:
try:
self.ftp.nlst(file)
except ftplib.error_perm:
reason = None
raise IOError, ('ftp error', reason), sys.exc_info()[2]
self.ftp.voidcmd(cmd)
try:
cmd = 'RETR ' + file
conn = self.ftp.ntransfercmd(cmd)
except ftplib.error_perm:
reason = None
if str(reason)[:3] != '550':
raise IOError, ('ftp error', reason), sys.exc_info()[2]
except:
str(reason)[:3] != '550'
None<EXCEPTION MATCH>ftplib.error_perm
if not conn:
self.ftp.voidcmd('TYPE A')
if file:
cmd = 'LIST ' + file
else:
cmd = 'LIST'
conn = self.ftp.ntransfercmd(cmd)
self.busy = 1
return (addclosehook(conn[0].makefile('rb'), self.endtransfer), conn[1])
def endtransfer(self):
if not self.busy:
return None
self.busy = 0
try:
self.ftp.voidresp()
except ftperrors():
pass
def close(self):
self.endtransfer()
try:
self.ftp.close()
except ftperrors():
pass
class addbase:
    """Base class for addinfo and addclosehook."""

    def __init__(self, fp):
        self.fp = fp
        # Delegate the file-object protocol to the wrapped object; only
        # forward the optional methods the wrapped object actually has.
        self.read = self.fp.read
        self.readline = self.fp.readline
        if hasattr(self.fp, "readlines"):
            self.readlines = self.fp.readlines
        if hasattr(self.fp, "fileno"):
            self.fileno = self.fp.fileno
        if hasattr(self.fp, "__iter__"):
            self.__iter__ = self.fp.__iter__
            if hasattr(self.fp, "next"):
                self.next = self.fp.next

    def __repr__(self):
        return '<%s at %r whose fp = %r>' % (self.__class__.__name__,
                                             id(self), self.fp)

    def close(self):
        # Drop the delegated methods so further reads fail loudly.
        self.read = None
        self.readline = None
        self.readlines = None
        self.fileno = None
        if self.fp:
            self.fp.close()
        self.fp = None
class addclosehook(addbase):
    """Class to add a close hook to an open file."""

    def __init__(self, fp, closehook, *hookargs):
        addbase.__init__(self, fp)
        self.closehook = closehook
        self.hookargs = hookargs

    def close(self):
        addbase.close(self)
        # Fire the hook exactly once, then drop the references.
        if self.closehook:
            self.closehook(*self.hookargs)
            self.closehook = None
            self.hookargs = None
class addinfo(addbase):
    """class to add an info() method to an open file."""

    def __init__(self, fp, headers):
        addbase.__init__(self, fp)
        self.headers = headers

    def info(self):
        return self.headers
class addinfourl(addbase):
    """class to add info() and geturl() methods to an open file."""

    def __init__(self, fp, headers, url):
        addbase.__init__(self, fp)
        self.headers = headers
        self.url = url

    def info(self):
        return self.headers

    def geturl(self):
        return self.url
# Define _is_unicode according to whether this build has a unicode type.
# The decompiled source had lost the try/except/else structure and defined
# both versions unconditionally, so the second always clobbered the first.
try:
    unicode
except NameError:
    def _is_unicode(x):
        return 0
else:
    def _is_unicode(x):
        return isinstance(x, unicode)
def toBytes(url):
    """toBytes(u"URL") --> 'URL'."""
    # Most URL schemes require ASCII. If that changes, the conversion
    # can be relaxed.
    if _is_unicode(url):
        try:
            url = url.encode("ASCII")
        except UnicodeError:
            raise UnicodeError("URL " + repr(url) +
                               " contains non-ASCII characters")
    return url
def unwrap(url):
    """unwrap('<URL:type://host/path>') --> 'type://host/path'."""
    url = url.strip()
    # Strip one layer of <...> wrapping, then an optional 'URL:' prefix.
    if url[:1] == '<' and url[-1:] == '>':
        url = url[1:-1].strip()
    if url[:4] == 'URL:':
        url = url[4:].strip()
    return url
# Compiled lazily on first call and cached at module level.
_typeprog = None

def splittype(url):
    """splittype('type:opaquestring') --> 'type', 'opaquestring'."""
    global _typeprog
    if _typeprog is None:
        import re
        _typeprog = re.compile('^([^/:]+):')
    match = _typeprog.match(url)
    if match:
        scheme = match.group(1)
        # Scheme names are case-insensitive; normalize to lowercase.
        return scheme.lower(), url[len(scheme) + 1:]
    return None, url
# Compiled lazily on first call and cached at module level.
_hostprog = None

def splithost(url):
    """splithost('//host[:port]/path') --> 'host[:port]', '/path'."""
    global _hostprog
    if _hostprog is None:
        import re
        _hostprog = re.compile('^//([^/]*)(.*)$')
    match = _hostprog.match(url)
    if match:
        return match.group(1, 2)
    return None, url
# Compiled lazily on first call and cached at module level.
_userprog = None

def splituser(host):
    """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
    global _userprog
    if _userprog is None:
        import re
        # Greedy first group: splits at the LAST '@' in the string.
        _userprog = re.compile('^(.*)@(.*)$')
    match = _userprog.match(host)
    if match:
        return map(unquote, match.group(1, 2))
    return None, host
# Compiled lazily on first call and cached at module level.
_passwdprog = None

def splitpasswd(user):
    """splitpasswd('user:passwd') -> 'user', 'passwd'."""
    global _passwdprog
    if _passwdprog is None:
        import re
        _passwdprog = re.compile('^([^:]*):(.*)$')
    match = _passwdprog.match(user)
    if match:
        return match.group(1, 2)
    return user, None
# Compiled lazily on first call and cached at module level.
_portprog = None

def splitport(host):
    """splitport('host:port') --> 'host', 'port'."""
    global _portprog
    if _portprog is None:
        import re
        # Only a purely numeric suffix counts as a port.
        _portprog = re.compile('^(.*):([0-9]+)$')
    match = _portprog.match(host)
    if match:
        return match.group(1, 2)
    return host, None
# Compiled lazily on first call and cached at module level.
_nportprog = None

def splitnport(host, defport=-1):
    """Split host and port, returning numeric port.
    Return given default port if no ':' found; defaults to -1.
    Return numerical port if a valid number are found after ':'.
    Return None if ':' but not a valid number."""
    global _nportprog
    if _nportprog is None:
        import re
        _nportprog = re.compile('^(.*):(.*)$')
    match = _nportprog.match(host)
    if match:
        host, port = match.group(1, 2)
        if port:
            try:
                nport = int(port)
            except ValueError:
                # ':' present but the suffix is not a number.
                nport = None
        else:
            # Trailing ':' with no digits.
            nport = None
        return host, nport
    return host, defport
# Compiled lazily on first call and cached at module level.
_queryprog = None

def splitquery(url):
    """splitquery('/path?query') --> '/path', 'query'."""
    global _queryprog
    if _queryprog is None:
        import re
        # Greedy first group: splits at the LAST '?'.
        _queryprog = re.compile('^(.*)\\?([^?]*)$')
    match = _queryprog.match(url)
    if match:
        return match.group(1, 2)
    return url, None
# Compiled lazily on first call and cached at module level.
_tagprog = None

def splittag(url):
    """splittag('/path#tag') --> '/path', 'tag'."""
    global _tagprog
    if _tagprog is None:
        import re
        # Greedy first group: splits at the LAST '#'.
        _tagprog = re.compile('^(.*)#([^#]*)$')
    match = _tagprog.match(url)
    if match:
        return match.group(1, 2)
    return url, None
def splitattr(url):
    """splitattr('/path;attr1=value1;attr2=value2;...') ->
    '/path', ['attr1=value1', 'attr2=value2', ...]."""
    words = url.split(';')
    return words[0], words[1:]
# Compiled lazily on first call and cached at module level.
_valueprog = None

def splitvalue(attr):
    """splitvalue('attr=value') --> 'attr', 'value'."""
    global _valueprog
    if _valueprog is None:
        import re
        _valueprog = re.compile('^([^=]*)=(.*)$')
    match = _valueprog.match(attr)
    if match:
        return match.group(1, 2)
    return attr, None
def splitgophertype(selector):
    """splitgophertype('/Xselector') --> 'X', 'selector'."""
    # The gopher type is the single character after a leading '/'.
    if selector[:1] == '/' and selector[1:2]:
        return selector[1], selector[2:]
    return None, selector
# Map every two-digit hex escape (both lower- and upper-case) to the
# character it encodes.  (The decompiled source emitted unparseable
# pseudo-lambdas here; rebuilt as ordinary generator expressions.)
_hextochr = dict(('%02x' % i, chr(i)) for i in range(256))
_hextochr.update(('%02X' % i, chr(i)) for i in range(256))

def unquote(s):
    """unquote('abc%20def') -> 'abc def'."""
    res = s.split('%')
    for i in range(1, len(res)):
        item = res[i]
        try:
            res[i] = _hextochr[item[:2]] + item[2:]
        except KeyError:
            # Not a valid two-digit hex escape: keep the '%' literally.
            res[i] = '%' + item
    return "".join(res)
def unquote_plus(s):
    """unquote('%7e/abc+def') -> '~/abc def'"""
    # Form-encoded data uses '+' for spaces; convert before unquoting.
    return unquote(s.replace('+', ' '))
# Characters that never need quoting, in any component of a URL.
always_safe = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
               'abcdefghijklmnopqrstuvwxyz'
               '0123456789' '_.-')
# Cache of char -> replacement maps, keyed by the safe-set used.
_safemaps = {}

def quote(s, safe='/'):
    """quote('abc def') -> 'abc%20def'

    Each part of a URL, e.g. the path info, the query, etc., has a
    different set of reserved characters that must be quoted.

    RFC 2396 Uniform Resource Identifiers (URI): Generic Syntax lists
    the following reserved characters.

    reserved    = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" |
                  "$" | ","

    Each of these characters is reserved in some component of a URL,
    but not necessarily in all of them.

    By default, the quote function is intended for quoting the path
    section of a URL.  Thus, it will not encode '/'.  This character
    is reserved, but in typical usage the quote function is being
    called on a path where the existing slash characters are used as
    reserved characters.
    """
    cachekey = (safe, always_safe)
    try:
        safe_map = _safemaps[cachekey]
    except KeyError:
        safe += always_safe
        safe_map = {}
        for i in range(256):
            c = chr(i)
            # Safe characters map to themselves; everything else to %XX.
            if c in safe:
                safe_map[c] = c
            else:
                safe_map[c] = '%%%02X' % i
        _safemaps[cachekey] = safe_map
    res = map(safe_map.__getitem__, s)
    return ''.join(res)
def quote_plus(s, safe=''):
    """Quote the query fragment of a URL; replacing ' ' with '+'"""
    if ' ' not in s:
        return quote(s, safe)
    # Keep spaces unquoted during quote(), then turn them into '+'.
    quoted = quote(s, safe + ' ')
    return quoted.replace(' ', '+')
def urlencode(query, doseq = 0):
'''Encode a sequence of two-element tuples or dictionary into a URL query string.
If any values in the query arg are sequences and doseq is true, each
sequence element is converted to a separate parameter.
If the query arg is a sequence of two-element tuples, the order of the
parameters in the output will match the order of parameters in the
input.
'''
if hasattr(query, 'items'):
query = query.items()
else:
try:
if len(query) and not isinstance(query[0], tuple):
raise TypeError
except TypeError:
(ty, va, tb) = sys.exc_info()
raise TypeError, 'not a valid non-string sequence or mapping object', tb
l = []
if not doseq:
for k, v in query:
k = quote_plus(str(k))
v = quote_plus(str(v))
l.append(k + '=' + v)
else:
for k, v in query:
k = quote_plus(str(k))
if isinstance(v, str):
v = quote_plus(v)
l.append(k + '=' + v)
continue
if _is_unicode(v):
v = quote_plus(v.encode('ASCII', 'replace'))
l.append(k + '=' + v)
continue
try:
x = len(v)
except TypeError:
v = quote_plus(str(v))
l.append(k + '=' + v)
continue
for elt in v:
l.append(k + '=' + quote_plus(str(elt)))
return '&'.join(l)
def getproxies_environment():
    """Return a dictionary of scheme -> proxy server URL mappings.

    Scan the environment for variables named <scheme>_proxy;
    this seems to be the standard convention.  If you need a
    different way, you can pass a proxies dictionary to the
    [Fancy]URLopener constructor.
    """
    proxies = {}
    for name, value in os.environ.items():
        name = name.lower()
        # Empty values are treated as "no proxy for this scheme".
        if value and name[-6:] == '_proxy':
            proxies[name[:-6]] = value
    return proxies
# Platform-specific proxy discovery and bypass checks.
if sys.platform == 'darwin':
    def getproxies_internetconfig():
        """Return a dictionary of scheme -> proxy server URL mappings.

        By convention the mac uses Internet Config to store
        proxies.  An HTTP proxy, for instance, is stored under
        the HttpProxy key.
        """
        try:
            import ic
        except ImportError:
            return {}

        try:
            config = ic.IC()
        except ic.error:
            return {}
        proxies = {}
        # HTTP:
        if 'UseHTTPProxy' in config and config['UseHTTPProxy']:
            try:
                value = config['HTTPProxyHost']
            except ic.error:
                pass
            else:
                proxies['http'] = 'http://%s' % value
        # FTP: XXX To be done.
        # Gopher: XXX To be done.
        return proxies

    def proxy_bypass(x):
        return 0

    def getproxies():
        # Environment variables win over Internet Config.
        return getproxies_environment() or getproxies_internetconfig()

elif os.name == 'nt':
    def getproxies_registry():
        """Return a dictionary of scheme -> proxy server URL mappings.

        Win32 uses the registry to store proxies.
        """
        proxies = {}
        try:
            import _winreg
        except ImportError:
            # Std module, so should be around - but you never know!
            return proxies
        try:
            internetSettings = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,
                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
            proxyEnable = _winreg.QueryValueEx(internetSettings,
                                               'ProxyEnable')[0]
            if proxyEnable:
                # Returned as Unicode but problems if not converted to ASCII
                proxyServer = str(_winreg.QueryValueEx(internetSettings,
                                                       'ProxyServer')[0])
                if '=' in proxyServer:
                    # Per-protocol settings
                    for p in proxyServer.split(';'):
                        protocol, address = p.split('=', 1)
                        # See if address has a type:// prefix
                        import re
                        if not re.match('^([^/:]+)://', address):
                            address = '%s://%s' % (protocol, address)
                        proxies[protocol] = address
                else:
                    # Use one setting for all protocols
                    if proxyServer[:5] == 'http:':
                        proxies['http'] = proxyServer
                    else:
                        proxies['http'] = 'http://%s' % proxyServer
                        proxies['ftp'] = 'ftp://%s' % proxyServer
            internetSettings.Close()
        except (WindowsError, ValueError, TypeError):
            # Either registry key not found etc, or the value in an
            # unexpected format.
            pass
        return proxies

    def getproxies():
        """Return a dictionary of scheme -> proxy server URL mappings.

        Returns settings gathered from the environment, if specified,
        or the registry.
        """
        return getproxies_environment() or getproxies_registry()

    def proxy_bypass(host):
        try:
            import _winreg
            import re
        except ImportError:
            # Std modules, so should be around - but you never know!
            return 0
        try:
            internetSettings = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,
                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
            proxyEnable = _winreg.QueryValueEx(internetSettings,
                                               'ProxyEnable')[0]
            proxyOverride = str(_winreg.QueryValueEx(internetSettings,
                                                     'ProxyOverride')[0])
        except WindowsError:
            return 0
        if not proxyEnable or not proxyOverride:
            return 0
        # Try to make a host list from name and IP address.
        host = [host]
        try:
            addr = socket.gethostbyname(host[0])
            if addr != host:
                host.append(addr)
        except socket.error:
            pass
        # Make a check value list from the registry entry: replace the
        # '<local>' string by the localhost entry and the corresponding
        # canonical entry.
        proxyOverride = proxyOverride.split(';')
        i = 0
        while i < len(proxyOverride):
            if proxyOverride[i] == '<local>':
                proxyOverride[i:i + 1] = ['localhost',
                                          '127.0.0.1',
                                          socket.gethostname(),
                                          socket.gethostbyname(
                                              socket.gethostname())]
            i += 1
        # Now check if we match one of the registry values (the override
        # entries are shell-style globs: translate to regexes).
        for test in proxyOverride:
            test = test.replace(".", r"\.")     # mask dots
            test = test.replace("*", r".*")     # change glob sequence
            test = test.replace("?", r".")      # change glob char
            for val in host:
                if re.match(test, val, re.I):
                    return 1
        return 0

else:
    # By default use environment variables.
    getproxies = getproxies_environment

    def proxy_bypass(host):
        return 0
def test1():
s = ''
for i in range(256):
s = s + chr(i)
s = s * 4
t0 = time.time()
qs = quote(s)
uqs = unquote(qs)
t1 = time.time()
if uqs != s:
print 'Wrong!'
print repr(s)
print repr(qs)
print repr(uqs)
print round(t1 - t0, 3), 'sec'
def reporthook(blocknum, blocksize, totalsize):
print 'Block number: %d, Block size: %d, Total size: %d' % (blocknum, blocksize, totalsize)
def test(args = []):
if not args:
args = [
'/etc/passwd',
'file:/etc/passwd',
'file://localhost/etc/passwd',
'ftp://ftp.python.org/pub/python/README',
'http://www.python.org/index.html']
if hasattr(URLopener, 'open_https'):
args.append('https://synergy.as.cmu.edu/~geek/')
try:
for url in args:
print '-' * 10, url, '-' * 10
(fn, h) = urlretrieve(url, None, reporthook)
print fn
if h:
print '======'
for k in h.keys():
print k + ':', h[k]
print '======'
fp = open(fn, 'rb')
data = fp.read()
del fp
if '\r' in data:
table = string.maketrans('', '')
data = data.translate(table, '\r')
print data
(fn, h) = (None, None)
print '-' * 40
finally:
urlcleanup()
def main():
import getopt as getopt
import sys
try:
(opts, args) = getopt.getopt(sys.argv[1:], 'th')
except getopt.error:
msg = None
print msg
print 'Use -h for help'
return None
t = 0
for o, a in opts:
if o == '-t':
t = t + 1
if o == '-h':
print 'Usage: python urllib.py [-t] [url ...]'
print '-t runs self-test;', 'otherwise, contents of urls are printed'
return None
continue
if t:
if t > 1:
test1()
test(args)
elif not args:
print 'Use -h for help'
for url in args:
print urlopen(url).read(),
if __name__ == '__main__':
main()